Clean up SVM 32-bit exits.S file (remove all tsc/fpu).
author kaf24@firebug.cl.cam.ac.uk <kaf24@firebug.cl.cam.ac.uk>
Wed, 1 Mar 2006 22:38:18 +0000 (23:38 +0100)
committer kaf24@firebug.cl.cam.ac.uk <kaf24@firebug.cl.cam.ac.uk>
Wed, 1 Mar 2006 22:38:18 +0000 (23:38 +0100)
Signed-off-by: Tom Woller <thomas.woller@amd.com>
xen/arch/x86/hvm/svm/x86_32/exits.S

index 7c6147269af6770dcd32399a6b99f9a5184a022c..4325b1443d3f392686faf9d0e334a4706e2ed3bc 100644 (file)
@@ -88,9 +88,6 @@
 #define STGI   .byte 0x0F,0x01,0xDC
 #define CLGI   .byte 0x0F,0x01,0xDD
 
-#define DO_TSC_OFFSET 0
-#define DO_FPUSAVE    0
-        
 ENTRY(svm_asm_do_launch)
         sti
         CLGI                
@@ -101,36 +98,6 @@ ENTRY(svm_asm_do_launch)
         movl VCPU_svm_hsa_pa(%ebx), %eax
         VMSAVE
 
-#if DO_FPUSAVE
-        mov      %cr0, %eax     
-        push %eax
-        clts
-        lea     VCPU_arch_guest_fpu_ctxt(%ebx), %eax
-        fxrstor (%eax)
-        pop      %eax
-        mov      %eax, %cr0
-#endif
-
-#if (DO_TSC_OFFSET)
-        pushl %edx /* eax and edx get trashed by rdtsc */
-        pushl %eax
-        rdtsc
-        subl VCPU_svm_vmexit_tsc(%ebx),%eax   /* tsc's from    */
-        sbbl VCPU_svm_vmexit_tsc+4(%ebx),%edx /* last #VMEXIT? */
-        subl %eax,VMCB_tsc_offset(%ecx)  /* subtract from running TSC_OFFSET */
-        sbbl %edx,VMCB_tsc_offset+4(%ecx)
-        subl $20000,VMCB_tsc_offset(%ecx)  /* fudge factor for VMXXX calls  */
-        sbbl $0,VMCB_tsc_offset+4(%ecx)
-
-        /* 
-         * TODO: may need to add a kludge factor to account for all the cycles 
-         * burned in VMLOAD, VMSAVE, VMRUN...
-         */
-
-        popl %eax
-        popl %edx
-        #endif
-
         movl VCPU_svm_vmcb_pa(%ebx), %eax
         popl %ebx
         popl %ecx
@@ -150,32 +117,8 @@ ENTRY(svm_asm_do_launch)
         VMSAVE
         /* eax is the only register we're allowed to touch here... */
 
-#if DO_FPUSAVE
-        mov  %cr0, %eax
-        push %eax
-        clts
-        GET_CURRENT(%eax)
-        lea     VCPU_arch_guest_fpu_ctxt(%eax), %eax
-        fxsave (%eax)
-        fnclex
-        pop  %eax
-        mov  %eax, %cr0
-#endif
-        
         GET_CURRENT(%eax)
 
-#if (DO_TSC_OFFSET)
-        pushl %edx
-        pushl %ebx
-        movl %eax,%ebx
-        rdtsc
-        movl %eax,VCPU_svm_vmexit_tsc(%ebx)
-        movl %edx,VCPU_svm_vmexit_tsc+4(%ebx)
-        movl %ebx,%eax
-        popl %ebx
-        popl %edx
-#endif
-
         movl VCPU_svm_hsa_pa(%eax), %eax
         VMLOAD